In [1]:
import numpy as np
import matplotlib.pyplot as plt
import itertools
import math
In [2]:
def calculate_copeland(node, nodes, edges, directions):
    """Returns the Copeland score (out-degree) of node.

    edges maps each pair (u, v) with u < v to an index into directions;
    directions is a string of 0s and 1s, where "0" means the first item
    of the pair beats the second.
    """
    copeland_score = 0
    for node2 in nodes:
        if node == node2:
            continue
        if node2 > node:
            # node is the first item of the pair (node, node2)
            if directions[edges[(node, node2)]] == "0":
                copeland_score += 1
        # otherwise node is the second item of the pair (node2, node)
        elif directions[edges[(node2, node)]] == "1":
            copeland_score += 1
    return copeland_score


def calculate_max_copeland(nodes, edges, directions):
    """Returns the Copeland score of the Copeland winner (the maximum out-degree)."""
    return max(calculate_copeland(node, nodes, edges, directions) for node in nodes)


def run_se(nodes, edges, directions):
    """Returns the Copeland score of the single-elimination (SE) winner for the seeding given by nodes.

    edges maps each pair (u, v) with u < v to an index into directions;
    directions is a string of 0s and 1s ("0" means the first item of the pair wins).
    """
    orig_nodes = nodes
    
    # Run SE: repeatedly pair up adjacent nodes and advance the winners
    while len(nodes) > 1:
        next_level = []
        for i in range(0, len(nodes), 2):
            if i + 1 == len(nodes):
                # Odd number of nodes at this level: the last one gets a bye
                next_level.append(nodes[i])
            else:
                u, v = nodes[i], nodes[i + 1]
                # edges is keyed on (smaller, larger), so order the pair
                if u > v:
                    u, v = v, u
                if directions[edges[(u, v)]] == "0":
                    next_level.append(u)
                else:
                    next_level.append(v)
        nodes = next_level
    
    return calculate_copeland(nodes[0], orig_nodes, edges, directions)


def simulate_all(n):
    """Returns an np array of E[d+(F(T))] / max_{s in S} d+(s) for every tournament T on n nodes,
    where the expectation is taken over all seedings.
    """
    nodes = list(range(n))
    # Assign each unordered pair (u, v) with u < v an index into the direction string
    edges = {}
    num_edges = 0
    for i in range(len(nodes)):
        for j in range(i + 1, len(nodes)):
            u = nodes[i]
            v = nodes[j]
            edges[(u, v)] = num_edges
            num_edges += 1
    
    res = []
    # Enumerate every tournament as a zero-padded bitstring over the edges
    format_string = f"{{:0{num_edges}b}}"
    for tournament in range(2 ** num_edges):
        directions = format_string.format(tournament)
        num_seedings = 0
        total = 0
        # Average the SE winner's Copeland score over every seeding
        for seeding in itertools.permutations(nodes):
            total += run_se(seeding, edges, directions)
            num_seedings += 1
        expected_winner_score = total / num_seedings
        max_copeland_score = calculate_max_copeland(nodes, edges, directions)
        res.append(expected_winner_score / max_copeland_score)
    return np.array(res)
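As a quick sanity check on the pair/bitstring encoding used above, the snippet below runs the helpers on a hand-built 3-cycle (example_nodes, example_edges, and example_directions are illustrative values, not taken from the cells above): with n = 3 the edge order is (0, 1), (0, 2), (1, 2), so directions = "010" encodes 0 beats 1, 2 beats 0, and 1 beats 2.

# Minimal worked example of the encoding (hypothetical values for illustration only)
example_nodes = [0, 1, 2]
example_edges = {(0, 1): 0, (0, 2): 1, (1, 2): 2}
example_directions = "010"

# Every node in the 3-cycle has Copeland score 1
print([calculate_copeland(v, example_nodes, example_edges, example_directions)
       for v in example_nodes])                                # [1, 1, 1]
# Under seeding (0, 1, 2): 0 beats 1, 2 gets the bye, then 2 beats 0 in the final,
# so the SE winner is node 2, whose Copeland score is 1
print(run_se((0, 1, 2), example_edges, example_directions))    # 1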
In [3]:
def theoretical_min(n):
    """Reference curve log2(n) / (n - 2), compared against the simulated ratios below."""
    return math.log2(n) / (n - 2)
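Written out, the per-tournament statistic estimated by simulate_all (and, further below, by simulate_sample_both) and the reference curve above are as follows. This is just my reading of the docstrings; the $n - 2$ denominator is taken from theoretical_min as given.

\[
    r(T) \;=\; \frac{\mathbb{E}_{\sigma}\!\left[ d^{+}\!\big(F_{\sigma}(T)\big) \right]}{\max_{s \in S} d^{+}(s)},
    \qquad
    \texttt{theoretical\_min}(n) \;=\; \frac{\log_2 n}{n - 2},
\]

where $F_{\sigma}(T)$ is the single-elimination winner of tournament $T$ under seeding $\sigma$ and $d^{+}$ is the Copeland score (out-degree).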
In [4]:
def simulate_visual(num_nodes):
    the_min = theoretical_min(num_nodes)
    responses = simulate_all(num_nodes)
    fig, ax = plt.subplots()
    # Plot the per-tournament ratios in sorted order
    ax.plot(np.sort(responses))
    # Horizontal reference lines: theoretical_min(n) and the mean ratio
    ax.hlines([the_min, np.mean(responses)], 0, len(responses) - 1, color="red")
    ax.set_title(f"$n = {num_nodes}$")
    print(the_min, np.mean(responses))
    plt.show()
In [63]:
simulate_visual(5)
0.7739760316291208 0.9036458333333335
In [83]:
simulate_visual(6)
0.646240625180289 0.9285481770833334
In [7]:
import random
def simulate_sample_both(n, n_tournaments, n_seedings):
    """Returns an np array of estimates of E[d+(F(T))] / max_{s in S} d+(s) for n_tournaments
    random tournaments T on n nodes, using n_seedings random seedings per tournament.
    """
    np.random.seed(0)
    random.seed(0)
    
    nodes = list(range(n))
    # Same pair-to-index encoding as in simulate_all
    edges = {}
    num_edges = 0
    for i in range(len(nodes)):
        for j in range(i + 1, len(nodes)):
            u = nodes[i]
            v = nodes[j]
            edges[(u, v)] = num_edges
            num_edges += 1
    
    res = []
    for tournament in range(n_tournaments):
        # Sample a uniformly random tournament by flipping a coin per edge
        directions = "".join(random.choices("01", k=num_edges))
        num_seedings = 0
        total = 0
        # Estimate the expected SE winner score with random seedings
        for seeding in range(n_seedings):
            total += run_se(np.random.permutation(nodes), edges, directions)
            num_seedings += 1
        expected_winner_score = total / num_seedings
        max_copeland_score = calculate_max_copeland(nodes, edges, directions)
        res.append(expected_winner_score / max_copeland_score)
    return np.array(res)

def simulate_visual_sample_both(num_nodes, n_tournaments, n_seedings):
    the_min = theoretical_min(num_nodes)
    responses = simulate_sample_both(num_nodes, n_tournaments, n_seedings)
    fig, ax = plt.subplots()
    ax.plot(np.sort(responses))
    ax.hlines([the_min, np.mean(responses)], 0, len(responses) - 1, color="red")
    ax.set_title(f"$n = {num_nodes}$")
    print(the_min, np.min(responses), np.mean(responses))
    plt.show()
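For a single value of n, the sampler can also be called directly without plotting; a minimal usage sketch (illustrative parameter values, output not shown):

# Direct use of the sampler for one n (example parameters only)
ratios = simulate_sample_both(16, 500, 50)
print(theoretical_min(16), ratios.min(), ratios.mean())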
In [9]:
# Sweep n = 5..199; each printed line below is theoretical_min(n), min, and mean
# of the sampled per-tournament ratios for that n
for i in range(5, 200):
    simulate_visual_sample_both(i, 2000, 100)
0.7739760316291208 0.7000000000000001 0.9024350000000001
0.646240625180289 0.7775 0.9257958333333334
0.5614709844115209 0.79 0.91982825
0.5 0.778 0.9123719166666667
0.45284642877747316 0.715 0.8470238333333334
0.4152410118609203 0.7542857142857143 0.8720693214285714
0.3843812909596997 0.7525 0.8730926488095238
0.3584962500721156 0.75875 0.8768149503968254
0.3364036107400993 0.7544444444444445 0.8651854437229438
0.3172795768381337 0.755 0.8695323358585858
0.3005300458160399 0.7681818181818181 0.8681589466783217
0.2857142857142857 0.7558333333333334 0.8664160711788212
0.2724975227500226 0.7069230769230769 0.8156824293761794
0.2606203125901445 0.7215384615384616 0.8340318576839827
0.24987808902609324 0.7392857142857142 0.8398959058111006
0.24010711638263127 0.7493333333333333 0.8447249566671073
0.23117460119888214 0.7482352941176471 0.8428553095573587
0.22297158093186487 0.7441176470588236 0.8471022298220571
0.2154077121931911 0.7629411764705882 0.8492074017110556
0.20840738639641618 0.7444444444444445 0.8485681078185661
0.20190679085977062 0.7455555555555555 0.8390043512239824
0.1958516549225455 0.7442105263157895 0.8434099340480818
0.19019550008653874 0.7390476190476191 0.8445047674385943
0.18489826623298475 0.7571428571428571 0.8479371534905256
0.17992522204176192 0.7536363636363635 0.845214157702076
0.17524609270030425 0.7417391304347826 0.8477473850314241
0.1708343555305819 0.7321739130434782 0.8463291510555869
0.16666666666666666 0.7559999999999999 0.8468726933331033
0.16272239094704688 0.7180769230769232 0.8115990321938953
0.1589832137890731 0.7192307692307692 0.821755600922681
0.155432818695302 0.7196153846153847 0.8264383374781974
0.15205661768947976 0.7348148148148148 0.8318733972960961
0.14884152473225573 0.7286666666666667 0.8315953672810551
0.1457757642623218 0.7420689655172413 0.8350856541997793
0.1428487086178986 0.7464285714285713 0.8368331583470053
0.1400507393391411 0.7296666666666667 0.8389712459286727
0.13737312832354062 0.7363333333333333 0.832311958390447
0.13480793556946902 0.7370967741935485 0.8367486706878092
0.13234792084639263 0.734516129032258 0.8388075628889289
0.12998646711041184 0.744375 0.8414047508000958
0.12771751386813196 0.7312121212121212 0.8406468202470015
0.12553549900129576 0.7455882352941177 0.8421976996371204
0.12343530781505861 0.7097297297297298 0.8415597666196573
0.12141222827654687 0.7494285714285714 0.8432420323409608
0.11946191157691932 0.7278947368421053 0.8346012904346443
0.11758033728697342 0.7228947368421053 0.8393196629540985
0.11576378248921419 0.72875 0.841194791220923
0.11400879436282184 0.7381578947368421 0.8419322740600478
0.112312165775749 0.7262500000000001 0.8411969422205444
0.11067091350314362 0.7495 0.8433453259358967
0.1090822587457483 0.7504878048780488 0.8447450445392651
0.10754360966773341 0.733095238095238 0.8435080184016391
0.10605254571208621 0.7395121951219512 0.8424503728027971
0.10460680348442093 0.7306976744186047 0.8435512744620316
0.10320426402389196 0.737906976744186 0.8445796130782914
0.10184294130359516 0.7420454545454546 0.844989579564215
0.10052097182309977 0.7431111111111111 0.844223389355494
0.09923660517311458 0.7467391304347827 0.8453867718999938
0.09798819546721176 0.7445833333333334 0.8460569958441289
0.0967741935483871 0.7525531914893616 0.8468644549146697
0.09559313988934054 0.730204081632653 0.8233041315731634
0.09444365811497583 0.7247916666666666 0.8289080341329066
0.09332444908396573 0.7214285714285714 0.8304081566999518
0.09223428547348998 0.71 0.8334029134353342
0.09117200681758461 0.7394 0.834075558836545
0.09013651495507304 0.723921568627451 0.8360683470096268
0.08912676984789394 0.7338181818181818 0.8378325262112032
0.08814178573489018 0.726 0.8404998031281219
0.08718062758985941 0.718421052631579 0.8372165078780983
0.08624240785595765 0.7459615384615385 0.8402352180775681
0.08532628343145042 0.7474999999999999 0.8413421237193848
0.08443145288437277 0.7503773584905661 0.8433379874770517
0.08355715387593202 0.746 0.8422342813659365
0.08270266077450328 0.7412280701754386 0.8445590565564892
0.08186728244385848 0.7596363636363637 0.8451557613676105
0.08105036019086362 0.7434999999999999 0.8456004015021874
0.08025126585929904 0.7410344827586206 0.8416265942812563
0.07946940005772604 0.7468333333333333 0.8426186272304137
0.07870419051045585 0.7590322580645161 0.8444943441059201
0.07795509052169221 0.7344262295081967 0.8465920322491048
0.07722157754382773 0.7401639344262295 0.8449265653091971
0.07650315184169164 0.7483870967741936 0.84693865145226
0.07579933524527915 0.7615625 0.8479237935261904
0.07510966998415462 0.7572580645161291 0.8485438701025032
0.07443371759731492 0.7420634920634921 0.8476679264656868
0.07377105791283721 0.7281818181818183 0.8484061963549545
0.07312128809212018 0.7581818181818182 0.8481716700002536
0.07248402173396681 0.761875 0.8502083083530652
0.07185888803415419 0.7528358208955224 0.8504408874054893
0.07124553099649607 0.7538805970149254 0.8507650077243832
0.07064360869173063 0.7509859154929578 0.8516508605516574
0.07005279256086336 0.7508450704225352 0.8515608249097676
0.06947276675986451 0.7567164179104479 0.8452326708603263
0.06890322754286675 0.7530882352941176 0.8479349300915854
0.06834388268123309 0.753 0.849398979492596
0.06779445091606862 0.7394285714285714 0.8495202680480234
0.06725466144193731 0.7556338028169014 0.8494983267128601
0.06672425341971495 0.7455555555555555 0.8505928530635031
0.06620297551666553 0.7485526315789474 0.8524264049626022
0.06569058547197149 0.763013698630137 0.852642502267873
0.06518684968607887 0.7527397260273972 0.8513932070992876
0.06469154283233845 0.7617333333333334 0.8523853613751635
0.06420444748953474 0.7353947368421053 0.8530081587607485
0.06372535379399498 0.7683333333333333 0.8538081137809193
0.06325405911006472 0.7574324324324324 0.8527059363564051
0.06279036771782093 0.7627272727272727 0.8544622326526126
0.06233409051697345 0.7661038961038962 0.8541597453787626
0.06188504474597822 0.7582894736842105 0.8543964875007798
0.06144305371545214 0.7576923076923077 0.8526267695927616
0.06100794655504233 0.7588607594936709 0.8533777639504934
0.06057955797295907 0.7503703703703704 0.854419633616816
0.06015772802743484 0.763375 0.8555470719468914
0.05974230190942091 0.7634177215189873 0.8542927454407406
0.05933312973587795 0.7675903614457832 0.8557975071135608
0.05893006635305935 0.74 0.8560669199013894
0.05853297114922473 0.7691566265060241 0.8557653199529381
0.058141707876257095 0.7447674418604651 0.8564611306970341
0.05775614447969072 0.7620238095238095 0.8565793474642381
0.057376152936687935 0.7531395348837209 0.8568144336808794
0.057001609101531764 0.7602325581395348 0.8564746194470784
0.05663239255822835 0.7581176470588235 0.8573416618046256
0.05626838647983804 0.7552808988764045 0.8576497260333317
0.05590947749417732 0.7571590909090908 0.8582872194258501
0.05555555555555555 0.7701162790697675 0.8573931472694443
0.055206513822230345 0.7565517241379309 0.8416370726813791
0.0548622485392848 0.7555555555555555 0.8462820587453607
0.0545226589266469 0.7576666666666666 0.8478181083032176
0.0541876470719881 0.753763440860215 0.848049780211134
0.05385711782825336 0.7617777777777778 0.849937821237662
0.053530978715589185 0.7627956989247312 0.851046526170434
0.053209139827449854 0.7695555555555557 0.8529349359197265
0.05289151374067417 0.762258064516129 0.8538997400970725
0.05257801542933723 0.756063829787234 0.8521701193872485
0.05226856218219242 0.7709677419354839 0.8529061264834645
0.05196307352352925 0.7635483870967742 0.8544160498168026
0.051661471137282367 0.7735483870967742 0.8545538480110866
0.05136367879423592 0.7373469387755103 0.855914779357162
0.0510696222821763 0.7773684210526315 0.856593350766936
0.05077922933885382 0.7660416666666667 0.8568574175086483
0.050492429587621915 0.7675510204081633 0.8575311063504507
0.05020915447562891 0.7732291666666667 0.8546201907249498
0.049929337214444564 0.7726041666666666 0.8562553459989369
0.04965291272300941 0.765959595959596 0.857203134768815
0.04937981757280103 0.7436893203883495 0.857464145717585
0.04910998993511674 0.7753 0.8577997118290852
0.04884336953037757 0.763960396039604 0.8582628264503833
0.04857989757936294 0.776930693069307 0.8594518079254615
0.048319516756290565 0.7733333333333333 0.8598597285501085
0.048062171143659946 0.7706796116504854 0.8597592371368615
0.04780780618878224 0.7691428571428572 0.8597083156233606
0.04755636866192312 0.7742452830188679 0.8609390486458155
0.04730780661598863 0.7680188679245282 0.8611935883003007
0.04706206934768792 0.7597222222222222 0.8602241705521692
0.046819107360109635 0.7698130841121495 0.8622255678304571
0.04657887232665195 0.7785981308411215 0.8624231278304466
0.04634131705624913 0.7604545454545455 0.8620913288646824
0.046106395459840355 0.7755140186915889 0.8581103045376307
0.045874062518028905 0.7742990654205607 0.8606712117749754
0.04564427424988247 0.7713513513513514 0.8602135577747801
0.045416987682827675 0.7691964285714287 0.8607288568740078
0.045192160823593966 0.7663963963963963 0.8607105912950326
0.04496975263016417 0.7653097345132743 0.8624449623369305
0.04474972298469122 0.7666371681415929 0.8622870580405745
0.04453203266734193 0.773421052631579 0.8621890193954662
0.044316643331031046 0.783873873873874 0.8629064410976255
0.04410351747701013 0.7740517241379311 0.8638100122602611
0.0438926184312775 0.7884821428571429 0.8627635234181589
0.04368391032177705 0.7771551724137932 0.863494589244626
0.043477358056355116 0.7650833333333333 0.8645236888011955
0.04327292730144609 0.7751724137931034 0.8648004753079559
0.04307058446145855 0.7855652173913044 0.8647517972559599
0.042870296658835044 0.768 0.8648815398943657
0.042672031714759986 0.7941379310344828 0.8639191147570806
0.0424757581304909 0.7826050420168067 0.8641902115395328
0.04228144506928958 0.781880341880342 0.8649097995687327
0.042089062338930755 0.7688524590163934 0.8648535009525766
0.041898580374766514 0.7857983193277311 0.865644559874979
0.04170997022332609 0.7963865546218487 0.8661403758135594
0.04152320352643118 0.7791056910569105 0.8663937267918457
0.04133825250580776 0.7856666666666666 0.8656016879331626
0.04115508994817657 0.7921487603305785 0.8656695427747538
0.04097368919080452 0.7879674796747967 0.8662441435032786
0.040794024107500736 0.7813709677419355 0.8675825904354276
0.04061606909504106 0.7865322580645161 0.867668349665961
0.040439799060005735 0.7884677419354839 0.8673031577602821
0.04026518940601568 0.7973770491803279 0.8682953627104006
0.04009221602135317 0.7824603174603175 0.867888073532753
0.03992085526695345 0.7953600000000001 0.868926964466082
0.039751083964754345 0.7753125 0.8636503677128601
0.03958287938639129 0.776060606060606 0.8645482014947946
0.039416219242225964 0.77671875 0.8659349693290564
0.03925108167069695 0.7844186046511628 0.8658430397251415
0.03908744522798142 0.787890625 0.8666482409380115
0.03892528887795719 0.79515625 0.8668379495614249
0.03876459198245507 0.7783458646616541 0.8680729394225236